package org.apache.solr.core;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.File;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.RefCounted;
import org.junit.AfterClass;
import org.junit.BeforeClass;
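
/**
 * Verifies that SolrCore opens near-real-time (NRT) readers: the reader stays
 * NRT across startup over an existing index, soft commits, hard commits, and
 * core reloads, and segment cores are shared across reopens.
 */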
public class TestNRTOpen extends SolrTestCaseJ4 {

  @BeforeClass
  public static void beforeClass() throws Exception {
    // use a filesystem, because we need to create an index, then "start up solr"
    System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
    // and don't delete it initially
    System.setProperty("solr.test.leavedatadir", "true");
    // set these so that merges won't break the test
    System.setProperty("solr.tests.maxBufferedDocs", "100000");
    System.setProperty("solr.tests.mergePolicy", "org.apache.lucene.index.LogDocMergePolicy");
    initCore("solrconfig-basic.xml", "schema-minimal.xml");
    // add a doc
    assertU(adoc("foo", "bar"));
    assertU(commit());
    File myDir = initCoreDataDir;
    deleteCore();
    // boot up again over the same index
    initCoreDataDir = myDir;
    initCore("solrconfig-basic.xml", "schema-minimal.xml");
    // on startup we should get an NRT reader over the existing single-doc index
    assertNRT(1);
  }

  @AfterClass
  public static void afterClass() throws Exception {
    // ensure we clean up after ourselves, this will fire before superclass...
    System.clearProperty("solr.test.leavedatadir");
    System.clearProperty("solr.directoryFactory");
    System.clearProperty("solr.tests.maxBufferedDocs");
    System.clearProperty("solr.tests.mergePolicy");
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
    // delete all, then add initial doc
    assertU(delQ("*:*"));
    assertU(adoc("foo", "bar"));
    assertU(commit());
  }

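  // the searcher should remain NRT across core reloads, soft commits, and hard commits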
  public void testReaderIsNRT() {
    // core reload
    String core = h.getCore().getName();
    h.getCoreContainer().reload(core);
    assertNRT(1);

    // add a doc and soft commit
    assertU(adoc("baz", "doc"));
    assertU(commit("softCommit", "true"));
    assertNRT(2);

    // add a doc and hard commit
    assertU(adoc("bazz", "doc"));
    assertU(commit());
    assertNRT(3);

    // add a doc and core reload
    assertU(adoc("bazzz", "doc2"));
    h.getCoreContainer().reload(core);
    assertNRT(4);
  }

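  // segment cores should be shared across searchers: each commit may add new
  // segment cache keys, but keys from previously opened segments are reused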
  public void testSharedCores() {
    // clear out any junk
    assertU(optimize());

    Set<Object> s1 = getCoreCacheKeys();
    assertEquals(1, s1.size());

    // add a doc, will go in a new segment
    assertU(adoc("baz", "doc"));
    assertU(commit("softCommit", "true"));

    Set<Object> s2 = getCoreCacheKeys();
    assertEquals(2, s2.size());
    assertTrue(s2.containsAll(s1));

    // add two docs, will go in a new segment
    assertU(adoc("foo", "doc"));
    assertU(adoc("foo2", "doc"));
    assertU(commit());

    Set<Object> s3 = getCoreCacheKeys();
    assertEquals(3, s3.size());
    assertTrue(s3.containsAll(s2));

    // delete a doc
    assertU(delQ("foo2:doc"));
    assertU(commit());

    // same cores
    assertEquals(s3, getCoreCacheKeys());
  }

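  // asserts that the current searcher's raw reader is NRT and has the expected maxDoc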
  static void assertNRT(int maxDoc) {
    RefCounted<SolrIndexSearcher> searcher = h.getCore().getSearcher();
    try {
      DirectoryReader ir = searcher.get().getRawReader();
      assertEquals(maxDoc, ir.maxDoc());
      assertTrue("expected NRT reader, got: " + ir, ir.toString().contains(":nrt"));
    } finally {
      searcher.decref();
    }
  }

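  // returns the identity-based core cache keys of all leaf readers in the current searcher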
  private Set<Object> getCoreCacheKeys() {
    RefCounted<SolrIndexSearcher> searcher = h.getCore().getSearcher();
    Set<Object> set = Collections.newSetFromMap(new IdentityHashMap<Object,Boolean>());
    try {
      DirectoryReader ir = searcher.get().getRawReader();
      for (LeafReaderContext context : ir.leaves()) {
        set.add(context.reader().getCoreCacheKey());
      }
    } finally {
      searcher.decref();
    }
    return set;
  }
}